Meta-analysis of avian responses and butterfly eyespots

Author

Mizuno et al.

Published

August 31, 2023

1 Setting up

Load packages

Code
# Load (installing if missing) every package used in this analysis.
pacman::p_load(ape,        # phylogenetic trees: read.nexus(), vcv()
               DT,         # interactive tables: datatable()
               here,       # project-relative file paths
               tidyverse,  # data wrangling (%>%, mutate) and ggplot2 scales
               metafor,    # meta-analysis: escalc(), rma.mv(), vcalc()
               miWQS,      # pool.mi() for combining results across trees
               orchaRd,    # orchard_plot(), bubble_plot(), i2_ml()
               parallel,   # mclapply() for fitting models in parallel
               viridis)    # colour-blind-friendly colour scales

2 prepare datasets for analysis

First, we used the predator dataset to check whether we need to account for phylogeny. If so, we will keep the predator and prey datasets separate in the following meta-analyses.

Code
library(knitr)

# Read the raw datasets. Files are CP932 (Shift-JIS) encoded, hence the
# explicit fileEncoding. `header = TRUE` is spelled out: T/F are ordinary
# reassignable variables in R and should not be used as logical literals.
dat_predator <- read.csv(here("data/predator_22072023.csv"),
                         header = TRUE, fileEncoding = "CP932")

dat_all <- read.csv(here("data/all_15082023.csv"),
                    header = TRUE, fileEncoding = "CP932")

# Interactive table of the predator dataset (10 rows per page).
datatable(dat_predator, caption = "Predator datasets",
          options = list(pageLength = 10, scrollX = TRUE))

Table XX. Predator datasets

Code
# Read the posterior sample of bird phylogenies (NEXUS format, a multiPhylo
# list) and draw the first tree in a fan layout as a quick visual check.
trees <- read.nexus(here("data/bird_phy.nex"))
first_tree <- trees[[1]]
plot(first_tree, type = "fan")

Figure 1— Phylogenetic tree of bird species

Create function for calculating effect size and its variance.

Code
# Calculate the log response ratio (lnRR) and its sampling variance for every
# row of a dataset. Four cases are handled, keyed on Response x Study_design:
#   * "continuous"  + "independent": escalc(measure = "ROM")
#   * "continuous"  + "dependent"  : escalc(measure = "ROMC"), assuming a
#     correlation of 0.5 between the paired measurements
#   * "proportion1" (no SDs reported) + independent / dependent
#   * "proportion2" (SDs reported)    + independent / dependent
# Proportions are arcsine-square-root transformed before taking the ratio;
# rows with Direction == "Decrease" are reflected (1 - p) so that all effects
# point the same way.
#
# @param dt data frame with columns Tn, Cn, T_mean, C_mean, T_sd, C_sd,
#   T_proportion, C_proportion, Response, Study_design, Direction
# @return `dt` with two added columns: lnRR and lnRR_var
effect_lnRR <- function(dt) {
  # Replace zeros (which would break logs/ratios) with small minimum values,
  # and clamp extreme treatment proportions away from 0 and 1.
  # mutate() evaluates sequentially, so the two T_proportion rules apply in
  # order (floor at 0.01 first, then cap exact 1 at 0.9), as in the original.
  dt <- dt %>%
    mutate(
      C_mean       = ifelse(C_mean == 0, 0.04, C_mean),
      T_sd         = ifelse(T_sd == 0, 0.01, T_sd),
      C_sd         = ifelse(C_sd == 0, 0.05, C_sd),
      C_proportion = ifelse(C_proportion == 0, 0.01, C_proportion),
      T_proportion = ifelse(T_proportion < 0.01, 0.01, T_proportion),
      T_proportion = ifelse(T_proportion == 1, 0.9, T_proportion)
    )

  # Arcsine-square-root transform for proportions; defined once here instead
  # of being redefined identically inside every branch of the loop.
  asin_trans <- function(proportion) {
    asin(sqrt(proportion))
  }

  # Copy the dataset, adding empty effect-size columns to fill in below.
  dt1 <- dt %>%
    mutate(
      lnRR = NA,
      lnRR_var = NA
    )

  for (i in seq_len(nrow(dt1))) {
    Tn <- dt1$Tn[i]
    Cn <- dt1$Cn[i]
    T_mean <- dt1$T_mean[i]
    C_mean <- dt1$C_mean[i]
    T_proportion <- dt1$T_proportion[i]
    C_proportion <- dt1$C_proportion[i]
    T_sd <- dt1$T_sd[i]
    C_sd <- dt1$C_sd[i]
    Response <- dt1$Response[i]
    Study_design <- dt1$Study_design[i]
    Direction <- dt1$Direction[i]
    # (the unused local `Measure` was removed)

    # continuous data - using escalc() (metafor package)
    if (Response == "continuous" & Study_design == "independent") {

      # FIX: the control mean was previously passed as `m2 =`, relying on
      # partial argument matching against escalc()'s `m2i`; the full name is
      # now spelled out to avoid silent mismatches.
      effect <- escalc(
        measure = "ROM",
        n1i = Tn, n2i = Cn,
        m1i = T_mean, m2i = C_mean,
        sd1i = T_sd, sd2i = C_sd,
        var.names = c("lnRR", "lnRR_var")
      )

      dt1$lnRR[i] <- effect$lnRR
      dt1$lnRR_var[i] <- effect$lnRR_var
    } else if (Response == "continuous" & Study_design == "dependent") {

      # ROMC: ratio of means for dependent groups; ni is the average of the
      # two group sizes and ri = 0.5 is the assumed within-pair correlation.
      effect <- escalc(
        measure = "ROMC",
        ni = (Tn + Cn) / 2,
        m1i = T_mean, m2i = C_mean,
        sd1i = T_sd, sd2i = C_sd,
        ri = 0.5,
        var.names = c("lnRR", "lnRR_var")
      )

      dt1$lnRR[i] <- effect$lnRR
      dt1$lnRR_var[i] <- effect$lnRR_var
    }

    # proportion data (no sd values)
    else if (Response == "proportion1" & Study_design == "independent") {

      # Reflect the proportions when the predicted direction is a decrease.
      T_proportion <- replace(
        T_proportion, Direction == "Decrease",
        (1 - T_proportion[Direction == "Decrease"])
      )
      C_proportion <- replace(
        C_proportion, Direction == "Decrease",
        (1 - C_proportion[Direction == "Decrease"])
      )

      T_proportion <- asin_trans(T_proportion)
      C_proportion <- asin_trans(C_proportion)

      # lnRR of the transformed proportions; the variance uses the asymptotic
      # variance 1/8 of the arcsine transform, delta-method scaled.
      lnRR_pro1 <- log(T_proportion / C_proportion)
      lnRR_var_pro1 <- (1 / sqrt(8))^2 * (1 / (T_proportion^2 * Tn) +
        1 / (C_proportion^2 * Cn))

      dt1$lnRR[i] <- lnRR_pro1
      dt1$lnRR_var[i] <- lnRR_var_pro1

    } else if (Response == "proportion1" & Study_design == "dependent") {

      T_proportion <- replace(
        T_proportion, Direction == "Decrease",
        (1 - T_proportion[Direction == "Decrease"])
      )
      C_proportion <- replace(
        C_proportion, Direction == "Decrease",
        (1 - C_proportion[Direction == "Decrease"])
      )

      T_proportion <- asin_trans(T_proportion)
      C_proportion <- asin_trans(C_proportion)

      # As the independent case, minus a covariance term assuming a
      # correlation of 0.5 between the dependent treatment/control measures.
      lnRR_pro1 <- log(T_proportion / C_proportion)
      lnRR_var_pro1 <- (1 / sqrt(8))^2 * (1 / (T_proportion^2 * Tn) +
        1 / (C_proportion^2 * Cn)) -
        2 * 0.5 * sqrt((1 / sqrt(8))^2 * (1 / (T_proportion^2 * Tn))) *
          sqrt((1 / sqrt(8))^2 * (1 / (C_proportion^2 * Cn)))

      dt1$lnRR[i] <- lnRR_pro1
      dt1$lnRR_var[i] <- lnRR_var_pro1
    }

    # proportion data (exist sd values)
    else if (Response == "proportion2" & Study_design == "independent") {

      T_proportion <- replace(
        T_proportion, Direction == "Decrease",
        (1 - T_proportion[Direction == "Decrease"])
      )
      C_proportion <- replace(
        C_proportion, Direction == "Decrease",
        (1 - C_proportion[Direction == "Decrease"])
      )

      # NOTE(review): T_SD/C_SD are computed as sd^2 / (4 p (1 - p)) — i.e.
      # already variance-like quantities on the transformed scale — yet are
      # squared again in lnRR_var below. Confirm against the intended
      # delta-method formula; behavior is preserved here as in the original.
      T_SD <- T_sd^2 / (4 * (T_proportion) * (1 - (T_proportion)))
      C_SD <- C_sd^2 / (4 * (C_proportion) * (1 - (C_proportion)))

      T_proportion <- asin_trans(T_proportion)
      C_proportion <- asin_trans(C_proportion)

      # calculate lnRR and lnRR variance
      lnRR_pro2 <- log(T_proportion / C_proportion)
      lnRR_var_pro2 <- (T_SD)^2 * (1 / (T_proportion^2 * Tn)) +
        (C_SD)^2 * (1 / (C_proportion^2 * Cn))

      dt1$lnRR[i] <- lnRR_pro2
      dt1$lnRR_var[i] <- lnRR_var_pro2

    } else if (Response == "proportion2" & Study_design == "dependent") {

      T_proportion <- replace(
        T_proportion, Direction == "Decrease",
        (1 - T_proportion[Direction == "Decrease"])
      )
      C_proportion <- replace(
        C_proportion, Direction == "Decrease",
        (1 - C_proportion[Direction == "Decrease"])
      )

      # See the NOTE(review) in the independent proportion2 branch above.
      T_SD <- T_sd^2 / (4 * (T_proportion) * (1 - (T_proportion)))
      C_SD <- C_sd^2 / (4 * (C_proportion) * (1 - (C_proportion)))

      T_proportion <- asin_trans(T_proportion)
      C_proportion <- asin_trans(C_proportion)

      # As above, minus a covariance term assuming correlation 0.5.
      lnRR_pro2 <- log(T_proportion / C_proportion)
      lnRR_var_pro2 <- (T_SD)^2 * (1 / (T_proportion^2 * Tn)) +
        (C_SD)^2 * (1 / (C_proportion^2 * Cn)) -
        2 * 0.5 * sqrt((T_SD)^2 * (1 / (T_proportion^2 * Tn))) *
          sqrt((C_SD)^2 * (1 / (C_proportion^2 * Cn)))

      dt1$lnRR[i] <- lnRR_pro2
      dt1$lnRR_var[i] <- lnRR_var_pro2
    }
  }

  return(dt1)
}
Code
# Compute effect sizes (lnRR and its variance) for both datasets.
dat_pred <- effect_lnRR(dat_predator)
dat <- effect_lnRR(dat_all)

# Add an observation-level ID (one per effect size). seq_len() is used
# instead of 1:nrow() so the code stays correct even for zero-row data.
dat_pred$Obs_ID <- seq_len(nrow(dat_pred))
dat$Obs_ID <- seq_len(nrow(dat))

# Diameter, area, and background area are skewed, so log-transform them.
dat$Log_diameter <- log(dat$Diameter_pattern)
dat$Log_area <- log(dat$Area_pattern)
dat$Log_background <- log(dat$Area_background)


# Variance-covariance matrices of the sampling errors, allowing for
# non-independence of effect sizes within cohorts.
# NOTE(review): VCV_pred uses `subgroup = Obs_ID` (and no rho), whereas VCV
# uses `obs = Obs_ID` with rho = 0.5 — these mean different things in
# vcalc(); confirm the difference between the two calls is intentional.
VCV_pred <- vcalc(vi = lnRR_var, 
                  cluster = Cohort_ID, 
                  subgroup = Obs_ID, 
                  data = dat_pred)

VCV <- vcalc(vi = lnRR_var, 
             cluster = Cohort_ID,
             obs = Obs_ID,
             rho = 0.5,
             data = dat)

BUG — I cannot attach the caption to a datatable()-formatted table. Should I use a kable()-formatted table instead?

Code
datatable(dat, caption = "Dataset for meta-analysis", 
          options = list(pageLength = 10, scrollX = TRUE))

**Table XX. Dataset for meta-analysis**

If phylogeny does not explain much of the heterogeneity, we will not need to consider it, and the two datasets can be merged.

We used meta-analytical models to calculate total I2 (a measure of heterogeneity not caused by sampling error; Higgins. et al., 2003) and the partial I2 explained by each random factor.

Based on this, we do not need to include phylogenetic relatedness in our meta-regressions, as this random factor explained very little of the heterogeneity between effect sizes (partial I2 < 0.001%); we can therefore combine the two datasets (predator and prey) for the meta-analysis.

Code
# Fit one multilevel meta-analysis of the predator dataset, with six random
# effects including a Phylogeny term whose correlation structure is supplied
# by `cor_tree`.
# NOTE(review): the function reads VCV_pred and dat_pred from the global
# environment, and the default `cor_tree = vcv_tree` refers to an object that
# is never defined in this document — the default would fail if relied upon;
# all calls below pass cor_tree explicitly.
phy_model <- function(cor_tree = vcv_tree){
            model <- rma.mv(yi = lnRR,
                            V = VCV_pred,
                            random = list(~1 | Study_ID,
                                          ~1 | Shared_control_ID,
                                          ~1 | Cohort_ID,
                                          ~1 | Obs_ID,
                                          ~1 | Bird_species,
                                          ~1 | Phylogeny),
                            R = list(Phylogeny = cor_tree),
                            test = "t",
                            method = "REML",
                            sparse = TRUE,
                            data = dat_pred)
  model
}

# Take the first 50 trees and convert each to a phylogenetic correlation
# matrix (corr = TRUE rescales the covariance matrix to correlations).
tree_50 <- trees[1:50]
vcv_tree_50 <- map(tree_50, ~vcv(.x, corr = TRUE))


# Run multiple meta-analyses with 50 different trees and obtain the combined result


# One model per tree, fitted in parallel on 8 cores.
ma_50 <- mclapply(vcv_tree_50, phy_model, mc.cores = 8) 
Code
# Reload the 50 fitted models, then pool their intercept estimates and
# standard errors with pool.mi(), which expects a 1 x 2 x n array of
# (estimate, SE) pairs.
ma_50 <- readRDS(here("Rdata", "ma_50.RDS"))

# Extract the overall estimate and its standard error from each model.
est_50 <- vapply(ma_50, function(fit) fit$b[[1]], numeric(1))
se_50 <- vapply(ma_50, function(fit) fit$se, numeric(1))

# Interleave as (est, se, est, se, ...) and reshape into the required array.
my_array <- array(c(rbind(est_50, se_50)), dim = c(1, 2, 50))

com_res <- round(pool.mi(my_array), 4)
Code
knitr::kable(com_res, caption = "Combined result of 50 phylogenetic trees")
Combined result of 50 phylogenetic trees
pooled.mean pooled.total.se se.within se.between relative.inc.var frac.miss.info CI.1 CI.2 p.value
0.1394 0.1192 0.1192 0 0 0 -0.0943 0.3731 0.2424
Code
# Average each of the six random-effect variance components (sigma^2) over
# the 50 tree-specific models and display the means as a table.
sigma2_rows <- lapply(ma_50, function(fit) fit$sigma2)
sigma2_df <- data.frame(do.call(rbind, sigma2_rows))

names(sigma2_df) <- c("sigma^2.1_Study_ID", "sigma^2.2_SharedControl_ID",
                      "sigma^2.3_Cohort_ID", "sigma^2.4_Obs_ID",
                      "sigma^2.5_BirdSpecies", "sigma^2.6_Phylogeny")

sigma2_mod <- round(colMeans(sigma2_df), 4)

knitr::kable(sigma2_mod, caption = "Average variance components")
Average variance components
x
sigma^2.1_Study_ID 0.0000
sigma^2.2_SharedControl_ID 0.0923
sigma^2.3_Cohort_ID 0.1009
sigma^2.4_Obs_ID 0.5323
sigma^2.5_BirdSpecies 0.0000
sigma^2.6_Phylogeny 0.0000
We got 1000 phylogenetic trees from BirdTree.org

Only 50 trees are used as in Nakagawa & Villemereuil (2019)

3 Meta-analysis

We used meta-analytical models to calculate total I2 (a measure of heterogeneity not caused by sampling error; Higgins. et al., 2003) and the partial I2 explained by each random factor.

Which random effects should we remove from our models?

We can remove Shared control ID and Cohort ID

Code
# Intercept-only (overall effect) multilevel meta-analysis on the combined
# dataset, with four nested random effects.
# NOTE(review): the original comment said Cohort_ID was excluded because its
# variance was zero, but Cohort_ID (and Shared_control_ID) ARE included here;
# both estimate to ~0 in the output below — confirm which was intended.
ma_all <- rma.mv(yi = lnRR,
                  V = VCV, 
                  random = list(~1 | Study_ID,
                                ~1 | Shared_control_ID,
                                ~1 | Cohort_ID,
                                ~1 | Obs_ID),
                  test = "t",
                  method = "REML", 
                  sparse = TRUE,
                  data = dat)

summary(ma_all)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-248.2436   496.4871   506.4871   524.3289   506.7215   

Variance Components:

            estim    sqrt  nlvls  fixed             factor 
sigma^2.1  0.0490  0.2214     32     no           Study_ID 
sigma^2.2  0.0000  0.0000     88     no  Shared_control_ID 
sigma^2.3  0.0000  0.0000    157     no          Cohort_ID 
sigma^2.4  0.2423  0.4923    263     no             Obs_ID 

Test for Heterogeneity:
Q(df = 262) = 6322.6734, p-val < .0001

Model Results:

estimate      se    tval   df    pval   ci.lb   ci.ub      
  0.2086  0.0597  3.4913  262  0.0006  0.0909  0.3262  *** 

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Total and partial I^2: percentage of heterogeneity attributable to each
# random effect (orchaRd::i2_ml), rounded to 4 decimal places.
i2 <- round(i2_ml(ma_all), 4)

Heterogeneity is high (total I2 = 99.84%) and significant (Q = 6322.67, df = 262, p < 0.001)

I2_Total I2_Study_ID I2_Shared_control_ID I2_Cohort_ID I2_Obs_ID
99.8442 16.7983 0 0 83.0458

Table XX. Heterogeneity of effect size

Code
# Orchard plot of the overall effect size with its 95% confidence interval.
orchard_plot(ma_all,
              group = "Study_ID",
              xlab = "log response ratio (lnRR)", 
              angle = 45) + 
              scale_x_discrete(labels = c("Overall effect")) + 
              scale_fill_viridis_d()

Estimates of overall effect size and 95% confidence intervals

4 Meta-regressions: uni-moderator

Eyespot vs. conspicuous patterns — Is there a significant difference in effect size between the two pattern types?

Code
# Normal (with-intercept) meta-regression: Treatment_stimulus (eyespot vs.
# conspicuous) as the single moderator; the intercept is the reference level
# ("conspicuous", per the output below).
mr_eyespot <- rma.mv(yi = lnRR,
                    V = VCV, 
                    mods = ~ Treatment_stimulus,
                    random = list(~1 | Study_ID,
                                  ~1 | Obs_ID),
                    test = "t",
                    method = "REML", 
                    sparse = TRUE,
                    data = dat)

summary(mr_eyespot)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-247.4652   494.9305   502.9305   517.1886   503.0867   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0534  0.2312     32     no  Study_ID 
sigma^2.2  0.2426  0.4926    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6290.4224, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 0.1628, p-val = 0.6869

Model Results:

                           estimate      se    tval   df    pval    ci.lb 
intrcpt                      0.1783  0.0980  1.8199  261  0.0699  -0.0146 
Treatment_stimuluseyespot    0.0442  0.1096  0.4035  261  0.6869  -0.1716 
                            ci.ub    
intrcpt                    0.3712  . 
Treatment_stimuluseyespot  0.2601    

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Heterogeneity decomposition for the eyespot/conspicuous meta-regression.
i2_1 <- round(i2_ml(mr_eyespot), 4)
I2_Total I2_Study_ID I2_Obs_ID
99.8466 18.0186 81.828

Table XX. Heterogeneity of effect size between eyespot and conspicuous patterns

Code
# Orchard plot of effect sizes split by stimulus type (eyespot/conspicuous).
orchard_plot(mr_eyespot,
            mod = "Treatment_stimulus",
            group = "Study_ID",
            xlab = "log response ratio (lnRR)",
            angle = 45) + 
            scale_colour_viridis_d(option = "plasma")

Figure 2— Eyespot vs. conspicuous patterns
Code
# Intercept-removed version of mr_eyespot: "-1" drops the intercept so the
# model estimates a separate mean effect for each stimulus level.
mr_eyespot1 <- rma.mv(yi = lnRR,
                      V = VCV, 
                      mods = ~ Treatment_stimulus -1,
                      random = list(~1 | Study_ID,
                                    ~1 | Obs_ID),
                      test = "t",
                      method = "REML", 
                      sparse = TRUE,
                      data = dat)

summary(mr_eyespot1)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-247.4652   494.9305   502.9305   517.1886   503.0867   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0534  0.2312     32     no  Study_ID 
sigma^2.2  0.2426  0.4926    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6290.4224, p-val < .0001

Test of Moderators (coefficients 1:2):
F(df1 = 2, df2 = 261) = 5.9203, p-val = 0.0031

Model Results:

                               estimate      se    tval   df    pval    ci.lb 
Treatment_stimulusconspicuous    0.1783  0.0980  1.8199  261  0.0699  -0.0146 
Treatment_stimuluseyespot        0.2225  0.0696  3.1974  261  0.0016   0.0855 
                                ci.ub     
Treatment_stimulusconspicuous  0.3712   . 
Treatment_stimuluseyespot      0.3596  ** 

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Meta-regression: does log-transformed pattern diameter predict lnRR?
mr_diameter <- rma.mv(yi = lnRR,
                      V = VCV, 
                      mods = ~ Log_diameter,
                      random = list(~1 | Study_ID,
                                    ~1 | Obs_ID),
                      test = "t",
                      method = "REML", 
                      sparse = TRUE,
                      data = dat)

summary(mr_diameter)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-244.4086   488.8173   496.8173   511.0753   496.9735   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0440  0.2099     32     no  Study_ID 
sigma^2.2  0.2372  0.4870    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6288.3226, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 6.0358, p-val = 0.0147

Model Results:

              estimate      se     tval   df    pval    ci.lb   ci.ub    
intrcpt        -0.1679  0.1634  -1.0274  261  0.3052  -0.4897  0.1539    
Log_diameter    0.1945  0.0792   2.4568  261  0.0147   0.0386  0.3504  * 

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Bubble plot of effect size against log pattern diameter.
bubble_plot(mr_diameter,
            mod = "Log_diameter",
            group = "Study_ID",
            xlab = "Log-transformed diameter")

Figure 3— Effect size and pattern diameter
Code
# Meta-regression: does log-transformed pattern area predict lnRR?
mr_area <- rma.mv(yi = lnRR,
                  V = VCV, 
                  mods = ~ Log_area,
                  random = list(~1 | Study_ID,
                                ~1 | Obs_ID),
                  test = "t",
                  method = "REML", 
                  sparse = TRUE,
                  data = dat)

summary(mr_area)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-243.8857   487.7714   495.7714   510.0295   495.9277   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0483  0.2199     32     no  Study_ID 
sigma^2.2  0.2351  0.4849    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6283.7400, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 6.9863, p-val = 0.0087

Model Results:

          estimate      se     tval   df    pval    ci.lb   ci.ub     
intrcpt    -0.1849  0.1601  -1.1554  261  0.2490  -0.5001  0.1302     
Log_area    0.1106  0.0419   2.6432  261  0.0087   0.0282  0.1931  ** 

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Bubble plot of effect size against log pattern area.
bubble_plot(mr_area,
            mod = "Log_area",
            group = "Study_ID",
            xlab = "Log-transformed area")

Figure 4— Effect size and pattern area
Code
# Meta-regression: number of patterns as moderator.
# NOTE(review): unlike the other uni-moderator models, this one also includes
# a Shared_control_ID random effect — confirm this difference is intentional.
mr_num <- rma.mv(yi = lnRR,
                  V = VCV,
                  mods = ~ Number_pattern,
                  random = list(~1 | Study_ID,
                                ~1 | Shared_control_ID,
                                ~1 | Obs_ID),
                      test = "t",
                      method = "REML",
                      sparse = TRUE,
                      data = dat)

summary(mr_num)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-245.6558   491.3117   501.3117   519.1343   501.5470   

Variance Components:

            estim    sqrt  nlvls  fixed             factor 
sigma^2.1  0.0468  0.2164     32     no           Study_ID 
sigma^2.2  0.0000  0.0000     88     no  Shared_control_ID 
sigma^2.3  0.2386  0.4885    263     no             Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6307.0850, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 4.4504, p-val = 0.0358

Model Results:

                estimate      se     tval   df    pval    ci.lb    ci.ub      
intrcpt           0.3453  0.0877   3.9376  261  0.0001   0.1726   0.5180  *** 
Number_pattern   -0.0579  0.0274  -2.1096  261  0.0358  -0.1119  -0.0039    * 

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Bubble plot of effect size against number of patterns.
bubble_plot(mr_num,
            mod = "Number_pattern",
            group = "Study_ID",
            xlab = "Number of patterns") 

Figure 5— Effect size and number of patterns

Our dataset includes two types of stimuli: live and artificial. Is there a significant difference in effect size between the two stimulus types?

Code
# Normal (with-intercept) meta-regression: prey type as moderator; the
# output below shows a "Type_preyreal" coefficient, so the intercept is the
# non-"real" (artificial) level.
mr_prey_type <- rma.mv(yi = lnRR,
                  V = VCV, 
                  mods = ~ Type_prey,
                  random = list(~1 | Study_ID,
                                ~1 | Obs_ID),
                  test = "t",
                  method = "REML", 
                  sparse = TRUE,
                  data = dat)

summary(mr_prey_type)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-247.2961   494.5922   502.5922   516.8502   502.7484   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0566  0.2380     32     no  Study_ID 
sigma^2.2  0.2415  0.4914    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6259.1358, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 0.3325, p-val = 0.5647

Model Results:

               estimate      se    tval   df    pval    ci.lb   ci.ub    
intrcpt          0.1845  0.0759  2.4304  261  0.0158   0.0350  0.3339  * 
Type_preyreal    0.0763  0.1324  0.5767  261  0.5647  -0.1843  0.3370    

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Orchard plot of effect sizes split by prey type (real vs. artificial).
orchard_plot(mr_prey_type,
              mod = "Type_prey",
              group = "Study_ID",
              xlab = "Type of prey",
              angle = 45) +
              scale_colour_viridis_d(option = "plasma")

# Intercept-removed model: "- 1" drops the intercept so a separate mean
# effect is estimated for each prey-type level (as in mr_eyespot1).
# FIX: the original formula was `~ Type_prey` without "- 1", which made this
# model identical to the normal model mr_prey_type — its printed output
# still contained an intercept row.
mr_prey_type1 <- rma.mv(yi = lnRR,
                        V = VCV, 
                        mods = ~ Type_prey - 1,
                        random = list(~1 | Study_ID,
                                      ~1 | Obs_ID),
                        test = "t",
                        method = "REML", 
                        sparse = TRUE,
                        data = dat)

summary(mr_prey_type1)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-247.2961   494.5922   502.5922   516.8502   502.7484   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0566  0.2380     32     no  Study_ID 
sigma^2.2  0.2415  0.4914    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6259.1358, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 0.3325, p-val = 0.5647

Model Results:

               estimate      se    tval   df    pval    ci.lb   ci.ub    
intrcpt          0.1845  0.0759  2.4304  261  0.0158   0.0350  0.3339  * 
Type_preyreal    0.0763  0.1324  0.5767  261  0.5647  -0.1843  0.3370    

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Figure 6— Effect size and prey type - real vs. artificial

Our dataset includes four prey shapes: butterfly, abstract butterfly, abstract stimuli, and caterpillar. Is there a significant difference in effect size among these shapes?

Code
# Intercept-removed meta-regression ("- 1"): one mean effect per prey shape
# (abstract_butterfly, abstract_stimuli, butterfly, caterpillar in the
# output below). Also includes a Shared_control_ID random effect.
mr_prey_shape <- rma.mv(yi = lnRR,
                        V = VCV, 
                        mods = ~ Shape_prey - 1,
                        random = list(~1 | Study_ID,
                                      ~1 | Shared_control_ID,
                                      ~1 | Obs_ID),
                                      test = "t",
                                      method = "REML", 
                                      sparse = TRUE,
                                      data = dat)

summary(mr_prey_shape)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-243.5524   487.1049   501.1049   526.0027   501.5511   

Variance Components:

            estim    sqrt  nlvls  fixed             factor 
sigma^2.1  0.0560  0.2366     32     no           Study_ID 
sigma^2.2  0.0000  0.0000     88     no  Shared_control_ID 
sigma^2.3  0.2392  0.4890    263     no             Obs_ID 

Test for Residual Heterogeneity:
QE(df = 259) = 6062.6887, p-val < .0001

Test of Moderators (coefficients 1:4):
F(df1 = 4, df2 = 259) = 3.7576, p-val = 0.0054

Model Results:

                              estimate      se    tval   df    pval    ci.lb 
Shape_preyabstract_butterfly    0.3222  0.1078  2.9889  259  0.0031   0.1099 
Shape_preyabstract_stimuli      0.0208  0.1868  0.1112  259  0.9115  -0.3470 
Shape_preybutterfly             0.2604  0.1080  2.4120  259  0.0166   0.0478 
Shape_preycaterpillar           0.0663  0.1283  0.5164  259  0.6060  -0.1864 
                               ci.ub     
Shape_preyabstract_butterfly  0.5345  ** 
Shape_preyabstract_stimuli    0.3886     
Shape_preybutterfly           0.4731   * 
Shape_preycaterpillar         0.3190     

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Orchard plot of effect sizes split by prey shape.
orchard_plot(mr_prey_shape,
              mod = "Shape_prey",
              group = "Study_ID",
              xlab = "Shape of prey",
              angle = 45) +
              scale_colour_viridis_d()

Figure 7— Effect size and prey shape

5 Meta-regressions: multi-moderator

Include all moderators in the model.

Code
# Multi-moderator meta-regression including all moderators at once
# (stimulus type, size measures, pattern count, prey type and shape).
mr_all <- rma.mv(yi = lnRR,
                V = VCV, 
                mods = ~ Treatment_stimulus +
                Log_diameter + Log_background + 
                Log_area + Number_pattern + 
                Type_prey + Shape_prey,
                random = list(~1 | Study_ID,
                              ~1 | Obs_ID),
                test = "t",
                method = "REML", 
                sparse = TRUE,
                data = dat)

summary(mr_all)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-229.8962   459.7924   481.7924   520.7031   482.8833   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0157  0.1254     32     no  Study_ID 
sigma^2.2  0.2283  0.4778    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 254) = 5679.7577, p-val < .0001

Test of Moderators (coefficients 2:9):
F(df1 = 8, df2 = 254) = 3.9645, p-val = 0.0002

Model Results:

                            estimate      se     tval   df    pval    ci.lb 
intrcpt                      -0.1002  0.5052  -0.1983  254  0.8429  -1.0950 
Treatment_stimuluseyespot     0.1602  0.1175   1.3631  254  0.1741  -0.0713 
Log_diameter                 -0.0840  0.3392  -0.2478  254  0.8045  -0.7520 
Log_background               -0.0683  0.0823  -0.8301  254  0.4073  -0.2304 
Log_area                      0.2917  0.1841   1.5850  254  0.1142  -0.0707 
Number_pattern               -0.0218  0.0297  -0.7322  254  0.4647  -0.0803 
Type_preyreal                -0.0949  0.1377  -0.6889  254  0.4915  -0.3661 
Shape_preyabstract_stimuli   -0.8388  0.2629  -3.1909  254  0.0016  -1.3565 
Shape_preycaterpillar        -0.1400  0.1372  -1.0206  254  0.3084  -0.4102 
                              ci.ub     
intrcpt                      0.8947     
Treatment_stimuluseyespot    0.3917     
Log_diameter                 0.5840     
Log_background               0.0938     
Log_area                     0.6542     
Number_pattern               0.0368     
Type_preyreal                0.1764     
Shape_preyabstract_stimuli  -0.3211  ** 
Shape_preycaterpillar        0.1302     

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Only include moderators related to conspicuousness (pattern diameter, pattern area, number of patterns, and background area) in the model.

Code
# Multi-moderator meta-regression restricted to the conspicuousness-related
# moderators (stimulus type, diameter, background area, area, pattern count).
mr_cons <- rma.mv(yi = lnRR,
                V = VCV, 
                mods = ~ Treatment_stimulus +
                Log_diameter + Log_background + 
                Log_area + Number_pattern,
                random = list(~1 | Study_ID,
                              ~1 | Obs_ID),
                test = "t",
                method = "REML", 
                sparse = TRUE,
                data = dat)

summary(mr_cons)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-236.1399   472.2798   488.2798   516.6724   488.8605   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0352  0.1876     32     no  Study_ID 
sigma^2.2  0.2353  0.4850    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 257) = 6122.3387, p-val < .0001

Test of Moderators (coefficients 2:6):
F(df1 = 5, df2 = 257) = 3.7036, p-val = 0.0030

Model Results:

                           estimate      se     tval   df    pval    ci.lb 
intrcpt                      1.1087  0.4283   2.5887  257  0.0102   0.2653 
Treatment_stimuluseyespot    0.0188  0.1146   0.1645  257  0.8695  -0.2067 
Log_diameter                -0.1729  0.3644  -0.4745  257  0.6356  -0.8905 
Log_background              -0.2314  0.0765  -3.0254  257  0.0027  -0.3820 
Log_area                     0.3025  0.1983   1.5250  257  0.1285  -0.0881 
Number_pattern              -0.0202  0.0292  -0.6921  257  0.4895  -0.0777 
                             ci.ub     
intrcpt                     1.9521   * 
Treatment_stimuluseyespot   0.2444     
Log_diameter                0.5447     
Log_background             -0.0808  ** 
Log_area                    0.6930     
Number_pattern              0.0373     

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

6 Publication bias

ASK: Is this correct? Also, how do I number sub-captions correctly?

Code
# Funnel plots for visual inspection of publication bias.
# Left: effect size vs. standard error; right: effect size vs. inverse
# standard error (precision), which emphasises the most precise studies.
# FIX: corrected typos in the axis labels ("Standered" -> "Standard",
# "Inverese" -> "Inverse").
funnel(ma_all, yaxis = "sei", xlab = "Effect size (lnRR)", ylab = "Standard error")

funnel(ma_all, yaxis = "seinv", xlab = "Effect size (lnRR)", ylab = "Inverse standard error")

Figure 8— Effect size and its standard error

Figure 9— Effect size and its inverse standard error

Figure 10— Funnel plot of effect size and its standard error

Code
# Egger-style test for small-study effects: the moderator is the square root
# of the inverse "effective" sample size, sqrt((Cn + Tn) / (Cn * Tn)).
df_bias <- dat %>% mutate(sqrt_inv_e_n = sqrt((Cn + Tn)/(Cn * Tn)))

# NOTE(review): V here is the vector of raw sampling variances (lnRR_var),
# not the VCV matrix used in the other models — presumably intentional for
# this bias test; confirm.
bias_model <- rma.mv(yi = lnRR,
                      V = lnRR_var, 
                      mods = ~ 1 + sqrt_inv_e_n,
                      random = list(~1 | Study_ID,
                                    ~1 | Obs_ID),
                      test = "t",
                      method = "REML", 
                      sparse = TRUE,
                      data = df_bias)

summary(bias_model)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-245.8643   491.7285   499.7285   513.9866   499.8848   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0959  0.3096     32     no  Study_ID 
sigma^2.2  0.2159  0.4647    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 4119.2520, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 0.0088, p-val = 0.9254

Model Results:

              estimate      se     tval   df    pval    ci.lb   ci.ub    
intrcpt         0.2428  0.1380   1.7601  261  0.0796  -0.0288  0.5145  . 
sqrt_inv_e_n   -0.0363  0.3879  -0.0937  261  0.9254  -0.8001  0.7274    

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Bubble plot for the Egger-style test (effect size vs. sqrt inverse
# effective sample size).
bubble_plot(bias_model,
            mod = "sqrt_inv_e_n",
            group = "Study_ID",
            xlab = "Square root of inverse of effective sample size")

Figure 11— Egger’s test of publication bias
Code
# Decline-effect test: regress effect size on publication year.
# NOTE(review): data = df_bias (the dataset augmented above), so Year must be
# a column carried over from `dat` — confirm.
year_model <- rma.mv(yi = lnRR,
                      V = VCV, 
                      mods = ~  1 + Year,
                      random = list(~1 | Study_ID,
                                    ~1 | Obs_ID),
                      test = "t",
                      method = "REML", 
                      sparse = TRUE,
                      data = df_bias)

summary(year_model)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-247.3645   494.7289   502.7289   516.9870   502.8852   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0520  0.2281     32     no  Study_ID 
sigma^2.2  0.2428  0.4927    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6249.4316, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 0.0057, p-val = 0.9399

Model Results:

         estimate       se     tval   df    pval     ci.lb    ci.ub    
intrcpt    1.1904  12.9935   0.0916  261  0.9271  -24.3950  26.7758    
Year      -0.0005   0.0065  -0.0755  261  0.9399   -0.0132   0.0122    

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Plot the decline-effect regression: effect sizes by publication year.
bubble_plot(
  year_model,
  mod   = "Year",
  group = "Study_ID",
  xlab  = "Year of publication"
)

Figure 12— Decline effect

7 Additional analyses

We compared effect size between two datasets (predator and prey) to see whether there is any difference in effect size between two datasets.

Code
# Meta-regression with Dataset (predator vs. prey) as a categorical
# moderator, to test whether mean effect size differs between datasets.
mr_dataset <- rma.mv(
  yi     = lnRR,
  V      = VCV,
  mods   = ~ Dataset,
  random = list(~ 1 | Study_ID, ~ 1 | Obs_ID),
  test   = "t",
  method = "REML",
  sparse = TRUE,
  data   = dat
)

summary(mr_dataset)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-247.0730   494.1459   502.1459   516.4040   502.3022   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0522  0.2285     32     no  Study_ID 
sigma^2.2  0.2420  0.4919    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6222.4453, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 0.5964, p-val = 0.4406

Model Results:

             estimate      se    tval   df    pval    ci.lb   ci.ub    
intrcpt        0.1598  0.0880  1.8162  261  0.0705  -0.0135  0.3331  . 
Datasetprey    0.0919  0.1191  0.7723  261  0.4406  -0.1425  0.3264    

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Orchard plot (orchaRd) of effect sizes split by dataset
# (predator vs. prey); labels rotated 45 degrees for readability.
orchard_plot(
  mr_dataset,
  mod   = "Dataset",
  group = "Study_ID",
  xlab  = "Dataset",
  angle = 45
)

Figure 13— Effect size and dataset - predator and prey
Code
# Meta-regression with log-transformed background area as a continuous
# moderator, testing whether effect size varies with background size.
mr_background <- rma.mv(
  yi     = lnRR,
  V      = VCV,
  mods   = ~ Log_background,
  random = list(~ 1 | Study_ID, ~ 1 | Obs_ID),
  test   = "t",
  method = "REML",
  sparse = TRUE,
  data   = dat
)

summary(mr_background)

Multivariate Meta-Analysis Model (k = 263; method: REML)

   logLik   Deviance        AIC        BIC       AICc   
-247.0585   494.1170   502.1170   516.3751   502.2732   

Variance Components:

            estim    sqrt  nlvls  fixed    factor 
sigma^2.1  0.0556  0.2358     32     no  Study_ID 
sigma^2.2  0.2421  0.4920    263     no    Obs_ID 

Test for Residual Heterogeneity:
QE(df = 261) = 6234.1356, p-val < .0001

Test of Moderators (coefficient 2):
F(df1 = 1, df2 = 261) = 0.2144, p-val = 0.6437

Model Results:

                estimate      se     tval   df    pval    ci.lb   ci.ub    
intrcpt           0.4058  0.4286   0.9468  261  0.3446  -0.4382  1.2498    
Log_background   -0.0282  0.0609  -0.4631  261  0.6437  -0.1480  0.0917    

---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Code
# Plot the background-area meta-regression: effect sizes against the
# log-transformed background moderator.
# Fixed typo in the axis label: "backgroud" -> "background".
bubble_plot(
  mr_background,
  mod   = "Log_background",
  group = "Study_ID",
  xlab  = "Log-transformed background"
)

Figure 14— Effect size and background area

8 R session information

R version 4.3.1 (2023-06-16)

Platform: x86_64-apple-darwin20 (64-bit)

locale: en_US.UTF-8||en_US.UTF-8||en_US.UTF-8||C||en_US.UTF-8||en_US.UTF-8

attached base packages: parallel, stats, graphics, grDevices, utils, datasets, methods and base

other attached packages: knitr(v.1.43), viridis(v.0.6.4), viridisLite(v.0.4.2), orchaRd(v.2.0), miWQS(v.0.4.4), metafor(v.4.2-0), numDeriv(v.2016.8-1.1), metadat(v.1.2-0), Matrix(v.1.6-1), lubridate(v.1.9.2), forcats(v.1.0.0), stringr(v.1.5.0), dplyr(v.1.1.2), purrr(v.1.0.2), readr(v.2.1.4), tidyr(v.1.3.0), tibble(v.3.2.1), ggplot2(v.3.4.3), tidyverse(v.2.0.0), here(v.1.0.1), DT(v.0.28) and ape(v.5.7-1)

loaded via a namespace (and not attached): gridExtra(v.2.3), glm2(v.1.2.1), sandwich(v.3.0-2), rlang(v.1.1.1), magrittr(v.2.0.3), compiler(v.4.3.1), mgcv(v.1.8-42), vctrs(v.0.6.3), quantreg(v.5.97), pkgconfig(v.2.0.3), fastmap(v.1.1.1), backports(v.1.4.1), mcmc(v.0.9-7), ellipsis(v.0.3.2), labeling(v.0.4.2), pander(v.0.6.5), utf8(v.1.2.3), rmarkdown(v.2.24), tzdb(v.0.4.0), ggbeeswarm(v.0.7.2), MatrixModels(v.0.5-2), xfun(v.0.40), cachem(v.1.0.8), jsonlite(v.1.8.7), gmm(v.1.8), cluster(v.2.1.4), R6(v.2.5.1), bslib(v.0.5.1), stringi(v.1.7.12), Rsolnp(v.1.16), rlist(v.0.4.6.2), rpart(v.4.1.19), jquerylib(v.0.1.4), estimability(v.1.4.1), Rcpp(v.1.0.11), zoo(v.1.8-12), tmvmixnorm(v.1.1.1), base64enc(v.0.1-3), pacman(v.0.5.1), splines(v.4.3.1), nnet(v.7.3-19), timechange(v.0.2.0), tidyselect(v.1.2.0), rstudioapi(v.0.15.0), yaml(v.2.3.7), lattice(v.0.21-8), withr(v.2.5.0), coda(v.0.19-4), evaluate(v.0.21), tmvtnorm(v.1.5), foreign(v.0.8-84), survival(v.3.5-5), pillar(v.1.9.0), checkmate(v.2.2.0), stats4(v.4.3.1), generics(v.0.1.3), rprojroot(v.2.0.3), invgamma(v.1.1), mathjaxr(v.1.6-0), truncnorm(v.1.0-9), hms(v.1.1.3), munsell(v.0.5.0), scales(v.1.2.1), xtable(v.1.8-4), glue(v.1.6.2), emmeans(v.1.8.8), Hmisc(v.5.1-0), tools(v.4.3.1), data.table(v.1.14.8), SparseM(v.1.81), matrixNormal(v.0.1.1), mvtnorm(v.1.2-3), grid(v.4.3.1), MCMCpack(v.1.6-3), crosstalk(v.1.2.0), colorspace(v.2.1-0), nlme(v.3.1-162), beeswarm(v.0.4.0), condMVNorm(v.2020.1), vipor(v.0.4.5), htmlTable(v.2.4.1), latex2exp(v.0.9.6), Formula(v.1.2-5), cli(v.3.6.1), fansi(v.1.0.4), gtable(v.0.3.4), sass(v.0.4.7), digest(v.0.6.33), farver(v.2.1.1), htmlwidgets(v.1.6.2), htmltools(v.0.5.6), lifecycle(v.1.0.3) and MASS(v.7.3-60)